$macro!(config_for_key)
$macro!(config_list)
$macro!(doc)
+ $macro!(fetch)
$macro!(generate_lockfile)
$macro!(git_checkout)
$macro!(locate_project)
--- /dev/null
+use docopt;
+
+use cargo::ops;
+use cargo::core::{MultiShell};
+use cargo::util::{CliResult, CliError};
+use cargo::util::important_paths::find_root_manifest_for_cwd;
+
+docopt!(Options, "
+Fetch dependencies of a package from the network.
+
+Usage:
+ cargo fetch [options]
+
+Options:
+ -h, --help Print this message
+ --manifest-path PATH Path to the manifest to fetch dependencies for
+ -v, --verbose Use verbose output
+
+If a lockfile is available, this command will ensure that all of the git
+dependencies and/or registry dependencies are downloaded and locally
+available. The network is never touched after a `cargo fetch` unless
+the lockfile changes.
+
+If the lockfile is not available, then this is the equivalent of
+`cargo generate-lockfile`. A lockfile is generated and dependencies are also
+all updated.
+", flag_manifest_path: Option<String>)
+
+pub fn execute(options: Options, shell: &mut MultiShell) -> CliResult<Option<()>> {
+ shell.set_verbose(options.flag_verbose);
+ let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
+ try!(ops::fetch(&root, shell).map_err(|e| {
+ CliError::from_boxed(e, 101)
+ }));
+ Ok(None)
+}
+
+
//!
use std::os;
-use std::collections::{HashMap, HashSet};
+use std::collections::HashMap;
use core::registry::PackageRegistry;
use core::{MultiShell, Source, SourceId, PackageSet, Target, PackageId};
-use core::{Package, Summary, Resolve, resolver};
+use core::resolver;
use ops;
use sources::{PathSource};
use util::config::{Config, ConfigValue};
manifest_path.dir_path()));
let (packages, resolve_with_overrides, sources) = {
- let _p = profile::start("resolving...");
- let lockfile = manifest_path.dir_path().join("Cargo.lock");
- let source_id = package.get_package_id().get_source_id();
-
let mut config = try!(Config::new(*shell, jobs, target.clone()));
let mut registry = PackageRegistry::new(&mut config);
- let dependencies = package.get_dependencies().iter().filter(|dep| {
- dep.is_transitive() || dev_deps
- }).map(|d| d.clone()).collect::<Vec<_>>();
- match try!(ops::load_lockfile(&lockfile, source_id)) {
- Some(r) => try!(add_lockfile_sources(&mut registry, &package, &r)),
- None => try!(registry.add_sources(package.get_source_ids())),
- }
+ // First, resolve the package's *listed* dependencies, as well as
+ // downloading and updating all remotes and such.
+ try!(ops::resolve_and_fetch(&mut registry, &package));
- let resolved = try!(resolver::resolve(package.get_package_id(),
- package.get_dependencies(),
- &mut registry));
- try!(ops::write_resolve(&package, &resolved));
+ // Second, resolve with precisely what we're doing. Filter out
+ // transitive dependencies if necessary, specify features, handle
+ // overrides, etc.
+ let _p = profile::start("resolving w/ overrides...");
+ let dependencies = package.get_dependencies().iter().filter(|dep| {
+ dep.is_transitive() || dev_deps
+ }).map(|d| d.clone()).collect::<Vec<_>>();
try!(registry.add_overrides(override_ids));
let resolved_with_overrides =
try!(resolver::resolve(package.get_package_id(),
Ok(())
}
-
-/// When a lockfile is present, we want to keep as many dependencies at their
-/// original revision as possible. We need to account, however, for
-/// modifications to the manifest in terms of modifying, adding, or deleting
-/// dependencies.
-///
-/// This method will add any appropriate sources from the lockfile into the
-/// registry, and add all other sources from the root package to the registry.
-/// Any dependency which has not been modified has its source added to the
-/// registry (to retain the precise field if possible). Any dependency which
-/// *has* changed has its source id listed in the manifest added and all of its
-/// transitive dependencies are blacklisted to not be added from the lockfile.
-///
-/// TODO: this won't work too well for registry-based packages, but we don't
-/// have many of those anyway so we should be ok for now.
-fn add_lockfile_sources(registry: &mut PackageRegistry,
- root: &Package,
- resolve: &Resolve) -> CargoResult<()> {
- let deps = resolve.deps(root.get_package_id()).move_iter().flat_map(|deps| {
- deps.map(|d| (d.get_name(), d))
- }).collect::<HashMap<_, _>>();
-
- let mut sources = vec![root.get_package_id().get_source_id().clone()];
- let mut to_avoid = HashSet::new();
- let mut to_add = HashSet::new();
- for dep in root.get_dependencies().iter() {
- match deps.find(&dep.get_name()) {
- Some(&lockfile_dep) => {
- let summary = Summary::new(lockfile_dep, []);
- if dep.matches(&summary) {
- fill_with_deps(resolve, lockfile_dep, &mut to_add);
- } else {
- fill_with_deps(resolve, lockfile_dep, &mut to_avoid);
- sources.push(dep.get_source_id().clone());
- }
- }
- None => sources.push(dep.get_source_id().clone()),
- }
- }
-
- // Only afterward once we know the entire blacklist are the lockfile
- // sources added.
- for addition in to_add.iter() {
- if !to_avoid.contains(addition) {
- sources.push(addition.get_source_id().clone());
- }
- }
-
- return registry.add_sources(sources);
-
- fn fill_with_deps<'a>(resolve: &'a Resolve, dep: &'a PackageId,
- set: &mut HashSet<&'a PackageId>) {
- if !set.insert(dep) { return }
- for mut deps in resolve.deps(dep).move_iter() {
- for dep in deps {
- fill_with_deps(resolve, dep, set);
- }
- }
- }
-}
--- /dev/null
+use std::collections::{HashSet, HashMap};
+
+use core::{MultiShell, Package, PackageId, Summary};
+use core::registry::PackageRegistry;
+use core::resolver::{mod, Resolve};
+use core::source::Source;
+use ops;
+use sources::PathSource;
+use util::{CargoResult, Config};
+use util::profile;
+
+pub fn fetch(manifest_path: &Path,
+ shell: &mut MultiShell) -> CargoResult<()> {
+ let mut source = try!(PathSource::for_path(&manifest_path.dir_path()));
+ try!(source.update());
+ let package = try!(source.get_root_package());
+
+ let mut config = try!(Config::new(shell, None, None));
+ let mut registry = PackageRegistry::new(&mut config);
+ try!(resolve_and_fetch(&mut registry, &package));
+ Ok(())
+}
+
+pub fn resolve_and_fetch(registry: &mut PackageRegistry, package: &Package)
+ -> CargoResult<Resolve> {
+ let _p = profile::start("resolve and fetch...");
+
+ let lockfile = package.get_manifest_path().dir_path().join("Cargo.lock");
+ let source_id = package.get_package_id().get_source_id();
+ match try!(ops::load_lockfile(&lockfile, source_id)) {
+ Some(r) => try!(add_lockfile_sources(registry, package, &r)),
+ None => try!(registry.add_sources(package.get_source_ids())),
+ }
+
+ let resolved = try!(resolver::resolve(package.get_package_id(),
+ package.get_dependencies(),
+ registry));
+ try!(ops::write_resolve(package, &resolved));
+ Ok(resolved)
+}
+
+/// When a lockfile is present, we want to keep as many dependencies at their
+/// original revision as possible. We need to account, however, for
+/// modifications to the manifest in terms of modifying, adding, or deleting
+/// dependencies.
+///
+/// This method will add any appropriate sources from the lockfile into the
+/// registry, and add all other sources from the root package to the registry.
+/// Any dependency which has not been modified has its source added to the
+/// registry (to retain the precise field if possible). Any dependency which
+/// *has* changed instead has the source id listed in the manifest added, and
+/// all of its transitive dependencies are blacklisted so they are not added
+/// from the lockfile.
+///
+/// TODO: this won't work too well for registry-based packages, but we don't
+/// have many of those anyway so we should be ok for now.
+fn add_lockfile_sources(registry: &mut PackageRegistry,
+ root: &Package,
+ resolve: &Resolve) -> CargoResult<()> {
+ let deps = resolve.deps(root.get_package_id()).move_iter().flat_map(|deps| {
+ deps.map(|d| (d.get_name(), d))
+ }).collect::<HashMap<_, _>>();
+
+ let mut sources = vec![root.get_package_id().get_source_id().clone()];
+ let mut to_avoid = HashSet::new();
+ let mut to_add = HashSet::new();
+ for dep in root.get_dependencies().iter() {
+ match deps.find(&dep.get_name()) {
+ Some(&lockfile_dep) => {
+ let summary = Summary::new(lockfile_dep, []);
+ if dep.matches(&summary) {
+ fill_with_deps(resolve, lockfile_dep, &mut to_add);
+ } else {
+ fill_with_deps(resolve, lockfile_dep, &mut to_avoid);
+ sources.push(dep.get_source_id().clone());
+ }
+ }
+ None => sources.push(dep.get_source_id().clone()),
+ }
+ }
+
+    // Only afterward, once we know the entire blacklist, are the lockfile
+    // sources added.
+ for addition in to_add.iter() {
+ if !to_avoid.contains(addition) {
+ sources.push(addition.get_source_id().clone());
+ }
+ }
+
+ return registry.add_sources(sources);
+
+ fn fill_with_deps<'a>(resolve: &'a Resolve, dep: &'a PackageId,
+ set: &mut HashSet<&'a PackageId>) {
+ if !set.insert(dep) { return }
+ for mut deps in resolve.deps(dep).move_iter() {
+ for dep in deps {
+ fill_with_deps(resolve, dep, set);
+ }
+ }
+ }
+}
pub fn generate_lockfile(manifest_path: &Path,
shell: &mut MultiShell)
-> CargoResult<()> {
-
- log!(4, "compile; manifest-path={}", manifest_path.display());
-
let mut source = try!(PathSource::for_path(&manifest_path.dir_path()));
try!(source.update());
-
- // TODO: Move this into PathSource
let package = try!(source.get_root_package());
- debug!("loaded package; package={}", package);
-
let source_ids = package.get_source_ids();
+ let mut config = try!(Config::new(shell, None, None));
let resolve = {
- let mut config = try!(Config::new(shell, None, None));
-
let mut registry = PackageRegistry::new(&mut config);
try!(registry.add_sources(source_ids));
try!(resolver::resolve(package.get_package_id(),
package.get_dependencies(),
&mut registry))
};
-
try!(write_resolve(&package, &resolve));
Ok(())
}
pub use self::cargo_package::package;
pub use self::cargo_upload::{upload, upload_configuration, UploadConfig};
pub use self::cargo_upload::{upload_login, http_proxy, http_handle};
+pub use self::cargo_fetch::{fetch, resolve_and_fetch};
mod cargo_clean;
mod cargo_compile;
mod cargo_test;
mod cargo_package;
mod cargo_upload;
+mod cargo_fetch;
execs().with_stdout("1\n"));
})
+test!(fetch_downloads {
+ let bar = git_repo("bar", |project| {
+ project.file("Cargo.toml", r#"
+ [package]
+ name = "bar"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+ "#)
+ .file("src/lib.rs", "pub fn bar() -> int { 1 }")
+ }).assert();
+
+ let p = project("p1")
+ .file("Cargo.toml", format!(r#"
+ [project]
+ name = "p1"
+ version = "0.5.0"
+ authors = []
+ [dependencies.bar]
+ git = '{}'
+ "#, bar.url()).as_slice())
+ .file("src/main.rs", "fn main() {}");
+ assert_that(p.cargo_process("fetch"),
+ execs().with_status(0).with_stdout(format!("\
+{updating} git repository `{url}`
+", updating = UPDATING, url = bar.url())));
+
+ assert_that(p.process(cargo_dir().join("cargo")).arg("fetch"),
+ execs().with_status(0).with_stdout(""));
+})
--- /dev/null
+use support::{project, execs};
+use hamcrest::assert_that;
+
+fn setup() {}
+
+test!(no_deps {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [package]
+ name = "foo"
+ authors = []
+ version = "0.0.1"
+ "#)
+ .file("src/main.rs", r#"
+ mod a; fn main() {}
+ "#)
+ .file("src/a.rs", "");
+
+ assert_that(p.cargo_process("fetch"),
+ execs().with_status(0).with_stdout(""));
+})
mod test_cargo_build_auth;
mod test_cargo_registry;
mod test_cargo_upload;
+mod test_cargo_fetch;